A crawler is simply a program that fetches web pages automatically.
Basic flow: start from a URL --> parse the page to extract data & discover new URLs --> recurse on those URLs --> finish.
The extraction step itself: download the HTML --> parse out the data --> save the data.
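A minimal sketch of that loop in C# (DownloadHtml is the helper shown later in this post; ParseAndSave and ExtractLinks are hypothetical placeholders for the site-specific parsing):

static void Crawl(string startUrl)
{
    var visited = new HashSet<string>();
    var pending = new Queue<string>();
    pending.Enqueue(startUrl);
    while (pending.Count > 0)
    {
        string url = pending.Dequeue();
        if (!visited.Add(url)) continue;            // skip URLs we have already fetched
        string html = HttpHelper.DownloadHtml(url); // download the HTML
        if (string.IsNullOrEmpty(html)) continue;
        ParseAndSave(html);                         // extract and persist the data
        foreach (string next in ExtractLinks(html)) // discover new URLs and keep going
            pending.Enqueue(next);
    }
}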
Data is king: scrape novel text and build a content site;
a movie/anime download site;
image scraping;
government open-tender data, aggregated every day.
The ethics of crawling: it may not be illegal, but taking without asking is still stealing.
The robots protocol: a gentlemen's agreement (360 being the notorious rogue here); it is a moral line, not a technical defense.
Each site can provide a robots.txt that states which of its paths may be crawled.
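For example, a robots.txt might look like this (an illustrative file, not taken from any real site):

User-agent: *
Disallow: /admin/
Disallow: /search
Allow: /

User-agent: SomeBannedBot
Disallow: /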
Defense: inspect the request headers.
Attack: have the crawler fake every header a browser would send.
Defense: require user login.
Attack: send the cookies along with the request.
Defense: a crawler's request rate is abnormally high, so blacklist high-frequency IPs or return a CAPTCHA.
Attack: obtain multiple IPs (ADSL redialing / "168" IP spoofing / proxy IPs); see the proxy sketch after this list.
Attack: break the CAPTCHA with open-source image-recognition tools (OCR) or a paid captcha-solving service.
Defense: load data dynamically with JS; render data as images; use JS to collect user-behaviour signals and send them back to the server; custom client controls (which can collect even more information).
All of the above can eventually be defeated; it is a perpetual arms race.
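As a sketch of the proxy-IP countermeasure mentioned above, an HttpWebRequest can be routed through a proxy; the address here is a made-up placeholder that would normally come from a proxy pool:

HttpWebRequest request = WebRequest.Create(url) as HttpWebRequest;
request.Proxy = new WebProxy("http://10.0.0.1:8080"); // hypothetical proxy address
request.UserAgent = "Mozilla/5.0 ...";                // still mimic a real browser at the same time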
Capturing requests from the mobile version of a site: run the app in an emulator and capture the traffic on the PC with Fiddler.
Request-code generator: http://tool.sufeinet.com/HttpHelper.aspx
//The method signature is not shown in the post; based on the HttpHelper.DownloadHtml call further down, it is assumed to be a static method on HttpHelper with a static logger field.
public static string DownloadHtml(string url)
{
string html = string.Empty;
HttpWebRequest request = WebRequest.Create(url) as HttpWebRequest;//build the simulated request
request.Timeout = 30 * 1000;//30-second timeout
request.UserAgent = "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.106 Safari/537.36";//desktop browser UA
//request.UserAgent = "Ruanmou Crawler";
//request.UserAgent = "Mozilla/5.0 (iPhone; CPU iPhone OS 7_1_2 like Mac OS X) AppleWebKit/537.51.2 (KHTML, like Gecko) Version/7.0 Mobile/11D257 Safari/9537.53";//mobile browser UA
request.ContentType = "text/html; charset=utf-8";//or "text/html;charset=gbk"
request.Host = "www.jd.com";
request.Headers.Add("Cookie", @"newUserFlag=1; guid=YFT7C9E6TMFU93FKFVEN7TEA5HTCF5DQ26HZ; gray=959782; cid=av9kKvNkAPJ10JGqM_rB_vDhKxKM62PfyjkB4kdFgFY5y5VO; abtest=31; _ga=GA1.2.334889819.1425524072; grouponAreaId=37; provinceId=20; search_showFreeShipping=1; rURL=http%3A%2F%2Fsearch.yhd.com%2Fc0-0%2Fkiphone%2F20%2F%3Ftp%3D1.1.12.0.73.Ko3mjRR-11-FH7eo; aut=5GTM45VFJZ3RCTU21MHT4YCG1QTYXERWBBUFS4; ac=57265177%40qq.com; msessionid=H5ACCUBNPHMJY3HCK4DRF5VD5VA9MYQW; gc=84358431%2C102362736%2C20001585%2C73387122; tma=40580330.95741028.1425524063040.1430288358914.1430790348439.9; tmd=23.40580330.95741028.1425524063040.; search_browse_history=998435%2C1092925%2C32116683%2C1013204%2C6486125%2C38022757%2C36224528%2C24281304%2C22691497%2C26029325; detail_yhdareas=""; cart_cookie_uuid=b64b04b6-fca7-423b-b2d1-ff091d17e5e5; gla=20.237_0_0; JSESSIONID=14F1F4D714C4EE1DD9E11D11DDCD8EBA; wide_screen=1; linkPosition=search");
//request.Headers.Add("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8");
//request.Headers.Add("Accept-Encoding", "gzip, deflate, sdch");
//request.Headers.Add("Referer", "http://list.yhd.com/c0-0/b/a-s1-v0-p1-price-d0-f0-m1-rt0-pid-mid0-kiphone/");
request.Method = "GET";
//Encoding enc = Encoding.GetEncoding("GB2312"); // if the page comes back garbled, switch between utf-8 and GB2312
#region Post
//int sort = 2;//sort by number of people
//string dataString = string.Format("k={0}&n=24&st={1}&iso=0&src=1&v=4093&p={2}&isRecommend=false&city_id=0&from=1&ldw=1361580739", keyword, sort, 1);
//Encoding encoding = Encoding.UTF8;//set to match the target site's encoding
//byte[] postData = encoding.GetBytes(dataString);
//request.ContentLength = postData.Length;
//Stream requestStream = request.GetRequestStream();
//requestStream.Write(postData, 0, postData.Length);
#endregion
Encoding enc = Encoding.UTF8;//or Encoding.GetEncoding("GB2312") for GBK-encoded pages
using (HttpWebResponse response = request.GetResponse() as HttpWebResponse)//send the request
{
if (response.StatusCode != HttpStatusCode.OK)
{
logger.Warn(string.Format("Fetching {0} failed: response.StatusCode was {1}", url, response.StatusCode));
}
else
{
try
{
StreamReader sr = new StreamReader(response.GetResponseStream(), enc);
html = sr.ReadToEnd();//read the whole response body
sr.Close();
}
catch (Exception ex)
{
logger.Error($"DownloadHtml failed to fetch {url}", ex);
html = null;
}
}
}
return html;
}
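One caveat about the commented-out Accept-Encoding: gzip header above: if it is enabled, the server may return a compressed body and ReadToEnd would return garbage, so decompression has to be enabled as well, e.g.:

request.AutomaticDecompression = DecompressionMethods.GZip | DecompressionMethods.Deflate; // the framework then also sends Accept-Encoding for you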
The log4net.cfg configuration file
<?xml version="1.0" encoding="utf-8"?>
<log4net>
<!-- Define some output appenders -->
<appender name="rollingAppender" type="log4net.Appender.RollingFileAppender">
<file value="log\log.txt" />
<!--append to the existing log file instead of overwriting it-->
<appendToFile value="true" />
<!--minimal locking so the log can still be written under multi-threading; the appender is officially not thread-safe-->
<lockingModel type="log4net.Appender.FileAppender+MinimalLock" />
<!--rollingStyle can be: Once|Size|Date|Composite-->
<!--Composite = a combination of Size and Date-->
<rollingStyle value="Composite" />
<!--suffix appended to the file name when a backup file is rolled-->
<datePattern value="yyyyMMdd.TXT" />
<!--maximum number of backup files to keep; only the newest are retained-->
<!--when rollingStyle is Size, at most this many log files exist in total-->
<!--when rollingStyle is Composite, at most this many log files exist per day-->
<maxSizeRollBackups value="20" />
<!--allowed units: KB|MB|GB-->
<maximumFileSize value="3MB" />
<!--when true, the current log file always keeps the name given in the file node-->
<staticLogFileName value="true" />
<!--only output log entries with a level between INFO and FATAL-->
<filter type="log4net.Filter.LevelRangeFilter">
<param name="LevelMin" value="INFO" />
<param name="LevelMax" value="FATAL" />
</filter>
<layout type="log4net.Layout.PatternLayout">
<conversionPattern value="%date [%thread] %-5level %logger - %message%newline"/>
</layout>
</appender>
<!-- levels: OFF > FATAL > ERROR > WARN > INFO > DEBUG > ALL -->
<root>
<priority value="ALL"/>
<level value="ALL"/>
<appender-ref ref="rollingAppender" />
</root>
</log4net>
Log level hierarchy
OFF > FATAL > ERROR > WARN > INFO > DEBUG > ALL
Initialization
static Logger()
{
XmlConfigurator.Configure(new FileInfo(Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "CfgFiles\\log4net.cfg.xml")));
ILog Log = LogManager.GetLogger(typeof(Logger));
Log.Info("Logger module initialized");
}
private ILog loger = null;
public Logger(Type type)
{
loger = LogManager.GetLogger(type);
}
Writing log entries
Call the method that matches the level you want to record:
loger.Error(msg, ex);
loger.Warn(msg);
loger.Info(msg);
loger.Debug(msg);
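How the wrapper is wired into HttpHelper is not shown in the post; a minimal sketch, assuming each class keeps its own instance (the field name matches the logger used in DownloadHtml above):

// hypothetical wiring for the static logger field that DownloadHtml references
private static readonly Logger logger = new Logger(typeof(HttpHelper));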
The HTML downloaded by HttpWebRequest is usually parsed in one of two ways; this post uses XPath together with HtmlAgilityPack.
What is XPath?
XPath (XML Path Language) is a query language for locating nodes in an XML document.
XPath uses a path-like syntax to address nodes.
XPath defines seven kinds of nodes: element, attribute, text, namespace, processing-instruction, comment, and document.
An XML document is a tree of these nodes; the top-most element is called the root element.
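A few illustrative XPath expressions (generic examples, not tied to any particular page):

//dl/dt/a              every a element under a dt under a dl, anywhere in the document
/html/body/div         div elements that are direct children of body
//a[@href]             a elements that carry an href attribute
//div[@class='item']   div elements whose class attribute is exactly "item"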
public void Crawl()
{
//download the HTML
string html = HttpHelper.DownloadHtml("https://www.jd.com/allSort.aspx");
if (string.IsNullOrEmpty(html))
{
//the download failed and should be retried; record it
}
HtmlDocument document = new HtmlDocument();
document.LoadHtml(html);
{
//XPath expression
string secondPath = "//dl/dt/a";
HtmlNodeCollection nodeList = document.DocumentNode.SelectNodes(secondPath);//select all matching nodes
if (nodeList != null)
{
foreach (HtmlNode node in nodeList)
{
string url = node.Attributes["href"].Value;
string name = node.InnerText;
}
}
}
}
This post has also been published on my personal blog:
https://moushih.com/2022ithome26/